{ localUrl: '../page/1gs.html', arbitalUrl: 'https://arbital.com/p/1gs', rawJsonUrl: '../raw/1gs.json', likeableId: '434', likeableType: 'page', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], pageId: '1gs', edit: '1', editSummary: '', prevEdit: '0', currentEdit: '1', wasPublished: 'true', type: 'comment', title: '"Re: simulating a hostile su..."', clickbait: '', textLength: '1506', alias: '1gs', externalUrl: '', sortChildrenBy: 'recentFirst', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'PaulChristiano', editCreatedAt: '2015-12-29 23:16:51', pageCreatorId: 'PaulChristiano', pageCreatedAt: '2015-12-29 23:16:51', seeDomainId: '0', editDomainId: 'EliezerYudkowsky', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'true', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '219', text: 'Re: simulating a hostile superintelligence:\n\nI find this concern really unconcerning.\n\nSome points:\n\n* This is only really a problem if our own AI development, on Earth, is going so slowly that "having your AI speculate about what aliens might do" is not only the most effective way to develop a powerful AI, it is *way* more effective than what we were doing anyway. But it looks like "do AI development super slowly" is already a dead end for a bunch of other reasons, so we don't really need to talk about this particular bizarre reason. I guess you aren't yet convinced that this is a dead end, but I do hope to convince you at some point.\n* At the point where such massive amounts of internal computing power are being deployed, it seems implausible that an AI system won't be thinking about how to think. At that point, the concern is not about the internal robustness of our system, but instead about whether the AI is well-calibrated about its own internal robustness. The latter problem seems like one that we essentially have to solve anyway.\n\nI think that there is a higher burden of proof for advancing concerns that AI researchers will dismiss out of hand as crazy, and that we should probably only do it for concerns that are way more solid than this one.
Otherwise (1) it will become impossible to advance real concerns that sound crazy, because a pattern will have been established that crazy-sounding concerns actually are crazy, and (2) people interested in AI safety will be roundly dismissed as crazy.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '2', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '2016-02-21 14:54:42', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'PaulChristiano' ], childIds: [], parentIds: [ 'distant_SIs' ], commentIds: [ '1h5' ], questionIds: [], tagIds: [], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '4705', pageId: '1gs', userId: 'PaulChristiano', edit: '1', type: 'newEdit', createdAt: '2015-12-29 23:16:51', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '4700', pageId: '1gs', userId: 'PaulChristiano', edit: '0', type: 'newParent', createdAt: '2015-12-29 23:04:29', auxPageId: 'distant_SIs', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }