Expected utility agent

If you're not some kind of expected utility agent, you're going in circles.

An [18t] agent has some way of consistently scoring all the possible outcomes of its actions, like assigning 20 points to saving a burning orphanage. The agent weighs its actions by estimating the [probabilistic_expectation average expected] score of an action's consequences. For example, an action with a 50% chance of leading to an outcome with utility 20, a 25% chance of leading to an outcome with utility 35, and a 25% chance of leading to an outcome with utility 45, would have an expected utility of 30 (a short code sketch at the end of this page makes this concrete). These utilities can potentially reflect [1y any sort] of morality or values - selfishness, altruism, or [10h paperclips]. Several [ famous mathematical theorems] suggest that if you can't be viewed as some type of expected utility agent, you must be [circular_preferences going in circles], [Dutch_book_argument making bad bets], or exhibiting other detrimental behaviors. Several [ famous experiments] show that human beings do exhibit those behaviors, and [ can't be viewed as expected utility agents].

[summary(Brief): An [18t] agent has some way of scoring the consequences of its actions (e.g., rescuing a burning orphanage is worth 20 points), and it weighs actions according to their expected scores. This simple-sounding assumption has a lot of consequences.]

[summary(Technical): An [ agent] with a [ coherent] [ utility function] over [ outcomes] and a coherent [action_counterfactuals counterfactual] [ probability function] that relates its accessible [ actions] to their probable outcomes. Combining the utility function over outcomes with the probability function from actions to outcomes yields each action's expected utility. Most such agents treated in the literature are [expected_utility_maximizer maximizers], but [2r9 other forms of optimization] could also qualify if the [ decision rule] treats actions of equal expected utility equivalently. Several [ famous coherence theorems] suggest that any agent not exhibiting stupid behavior must be viewable as an expected utility agent.]

[todo: (Alexei: Is this line necessary if we have the summary paragraph visible?) An expected utility agent is an agent whose decision rule treats two actions equivalently whenever they have the same \expected_utility\.]

[todo: write longer explanation of expected utility, the consequences of the assumption, and an introduction.]
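As a quick illustration of the scoring rule above, here is a minimal Python sketch (not part of the original page): it reproduces the worked example from the text and shows an expected utility maximizer picking the highest-scoring action. The action names and the "do nothing" comparison are made up for the example.

```python
# Minimal sketch of the expected-utility rule described above.
# The outcome lotteries and action names are illustrative only.

def expected_utility(lottery):
    """Probability-weighted average utility of an action's possible outcomes.

    `lottery` is a list of (probability, utility) pairs whose probabilities
    should sum to 1.
    """
    return sum(p * u for p, u in lottery)

# The worked example from the text: 50% chance of utility 20,
# 25% chance of utility 35, 25% chance of utility 45 -> expected utility 30.
save_orphanage = [(0.50, 20), (0.25, 35), (0.25, 45)]
assert expected_utility(save_orphanage) == 30

# An expected utility *maximizer* (one kind of expected utility agent)
# ranks its available actions by this score and takes the highest.
actions = {
    "save_orphanage": save_orphanage,
    "do_nothing": [(1.0, 0)],  # hypothetical comparison action
}
best = max(actions, key=lambda a: expected_utility(actions[a]))
print(best)  # -> "save_orphanage"
```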