{ localUrl: '../page/2rw.html', arbitalUrl: 'https://arbital.com/p/2rw', rawJsonUrl: '../raw/2rw.json', pageId: '2rw', alias: '2rw', type: 'comment', title: '"Well, the *purpose* is to a..."', edit: '2', editCreatorId: 'EliezerYudkowsky', editCreatedAt: '2016-03-23 04:25:18', pageCreatorId: 'EliezerYudkowsky', pageCreatedAt: '2016-03-23 04:23:06', creatorIds: [ 'EliezerYudkowsky' ], parentIds: [ 'conservative_concept', '2rt' ], text: 'Well, the *purpose* is to avoid the AGI classifying potential goal fulfillments in a way that, from the user\'s perspective, is a "false positive". The reason we have to spend a lot of time thinking about really, really good ways to have the AGI not guess positive labels on things that we wouldn\'t label as positive is that the training data we present to the AI may be ambiguous in some way we don\'t know about, or in many ways we don\'t know about. That means the AI does not actually have the information to figure out what we meant by looking for the simplest way to classify the training cases, and instead has to restrict positive classifications to things that are very, very similar to the positively labeled training instances in order to minimize the probability of screwing up.\n\nI\'m pushing back a little on this "classifier that avoids false positives" description because that\'s what *every* classifier is in some sense intended to do; you have to be specific about how, or what approach you\'re taking, in order to say something that means more than just "classifier that is a good classifier".' }
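
A minimal sketch, not from the comment itself, of the "accept only things very, very similar to the positively labeled training instances" idea: one simple way to be conservative is a distance-threshold rule that labels an input positive only if it lies close to some known positive example. The function name, the radius value, and the toy data are illustrative assumptions, not a method the comment prescribes.

import numpy as np

def conservative_predict(x, positive_examples, radius=0.5):
    # Label positive only if x is within `radius` of some positively labeled
    # training instance; everything else is rejected, even if a "simplest"
    # decision boundary would have accepted it.
    distances = np.linalg.norm(positive_examples - x, axis=1)
    return bool(np.min(distances) <= radius)

# Toy usage: the positive training instances cluster near the origin.
positives = np.array([[0.0, 0.0], [0.1, 0.2], [-0.2, 0.1]])

print(conservative_predict(np.array([0.05, 0.05]), positives))  # True: close to known positives
print(conservative_predict(np.array([3.0, 3.0]), positives))    # False: far from anything labeled positive

The contrast with an ordinary classifier is that the rule above never generalizes far beyond the positive training data, trading away recall to reduce the chance of positives the user would call false.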