{
  localUrl: '../page/bayesian_prior.html',
  arbitalUrl: 'https://arbital.com/p/bayesian_prior',
  rawJsonUrl: '../raw/27p.json',
  likeableId: '1177',
  likeableType: 'page',
  myLikeValue: '0',
  likeCount: '1',
  dislikeCount: '0',
  likeScore: '1',
  individualLikes: [
    'EricBruylant'
  ],
  pageId: 'bayesian_prior',
  edit: '5',
  editSummary: '',
  prevEdit: '4',
  currentEdit: '5',
  wasPublished: 'true',
  type: 'wiki',
  title: 'Prior',
  clickbait: 'A state of prior knowledge, before seeing information on a new problem.  Potentially complicated.',
  textLength: '1155',
  alias: 'bayesian_prior',
  externalUrl: '',
  sortChildrenBy: 'likes',
  hasVote: 'false',
  voteType: '',
  votesAnonymous: 'false',
  editCreatorId: 'EliezerYudkowsky',
  editCreatedAt: '2016-03-04 06:11:28',
  pageCreatorId: 'EliezerYudkowsky',
  pageCreatedAt: '2016-03-04 04:37:50',
  seeDomainId: '0',
  editDomainId: 'AlexeiAndreev',
  submitToDomainId: '0',
  isAutosave: 'false',
  isSnapshot: 'false',
  isLiveEdit: 'true',
  isMinorEdit: 'false',
  indirectTeacher: 'false',
  todoCount: '0',
  isEditorComment: 'false',
  isApprovedComment: 'true',
  isResolved: 'false',
  snapshotText: '',
  anchorContext: '',
  anchorText: '',
  anchorOffset: '0',
  mergedInto: '',
  isDeleted: 'false',
  viewCount: '75',
text: 'Our (potentially rich or complex) state of knowledge and *propensity to learn,* before seeing the evidence, expressed as a [1zj probability function].  This is a deeper and more general concept than \'[-1rm]\'.  A prior probability is like guessing the chance that it will be cloudy outside, in advance of looking out a window.  The more general notion of a Bayesian prior would include probability distributions that answer the question, "*Suppose* I saw the Sun rising on 999 successive days; would I afterwards think the probability of the Sun rising on the next day was more like 1000/1001, 1/2, or 1 - 10^-6?"  In a sense, a baby can be said to have a \'prior\' before it opens its eyes, and then to develop a model of the world by updating on the evidence it sees after that point.  The baby\'s \'prior\' expresses not just its current ignorance, but the different kinds of worlds the baby would end up believing in, depending on what sensory evidence it saw over the rest of its life.  Key subconcepts include [219 ignorance priors] and [21b inductive priors], and key examples are [21c Laplace\'s Rule of Succession] and [11w Solomonoff induction].',
  metaText: '',
  isTextLoaded: 'true',
  isSubscribedToDiscussion: 'false',
  isSubscribedToUser: 'false',
  isSubscribedAsMaintainer: 'false',
  discussionSubscriberCount: '1',
  maintainerCount: '1',
  userSubscriberCount: '0',
  lastVisit: '',
  hasDraft: 'false',
  votes: [],
  voteSummary: [
    '0',
    '0',
    '0',
    '0',
    '0',
    '0',
    '0',
    '0',
    '0',
    '0'
  ],
  muVoteSummary: '0',
  voteScaling: '0',
  currentUserVote: '-2',
  voteCount: '0',
  lockedVoteType: '',
  maxEditEver: '0',
  redLinkCount: '0',
  lockedBy: '',
  lockedUntil: '',
  nextPageId: '',
  prevPageId: '',
  usedAsMastery: 'true',
  proposalEditNum: '0',
  permissions: {
    edit: {
      has: 'false',
reason: 'You don\'t have domain permission to edit this page'
    },
    proposeEdit: {
      has: 'true',
      reason: ''
    },
    delete: {
      has: 'false',
reason: 'You don\'t have domain permission to delete this page'
    },
    comment: {
      has: 'false',
reason: 'You can\'t comment in this domain because you are not a member'
    },
    proposeComment: {
      has: 'true',
      reason: ''
    }
  },
  summaries: {
Summary: 'Our (potentially rich or complex) state of knowledge and *propensity to learn,* before seeing the evidence, expressed as a [1zj probability function].  This is a deeper and more general concept than \'[-1rm]\'.  A prior probability is like guessing the chance that it will be cloudy outside, in advance of looking out a window.  The more general notion of a Bayesian prior would include probability distributions that answer the question, "*Suppose* I saw the Sun rising on 999 successive days; would I afterwards think the probability of the Sun rising on the next day was more like 1000/1001, 1/2, or 1 - 10^-6?"  In a sense, a baby can be said to have a \'prior\' before it opens its eyes, and then to develop a model of the world by updating on the evidence it sees after that point.  The baby\'s \'prior\' expresses not just its current ignorance, but the different kinds of worlds the baby would end up believing in, depending on what sensory evidence it saw over the rest of its life.  Key subconcepts include [219 ignorance priors] and [21b inductive priors], and key examples are [21c Laplace\'s Rule of Succession] and [11w Solomonoff induction].'
  },
  creatorIds: [
    'EliezerYudkowsky'
  ],
  childIds: [],
  parentIds: [
    'bayes_reasoning'
  ],
  commentIds: [
    '27w'
  ],
  questionIds: [],
  tagIds: [
    'stub_meta_tag'
  ],
  relatedIds: [],
  markIds: [],
  explanations: [
    {
      id: '5846',
      parentId: 'bayesian_prior',
      childId: 'bayesian_prior',
      type: 'subject',
      creatorId: 'AlexeiAndreev',
      createdAt: '2016-08-02 17:17:23',
      level: '1',
      isStrong: 'true',
      everPublished: 'true'
    }
  ],
  learnMore: [],
  requirements: [
    {
      id: '2463',
      parentId: 'bayes_rule',
      childId: 'bayesian_prior',
      type: 'requirement',
      creatorId: 'AlexeiAndreev',
      createdAt: '2016-06-17 21:58:56',
      level: '1',
      isStrong: 'false',
      everPublished: 'true'
    }
  ],
  subjects: [
    {
      id: '5846',
      parentId: 'bayesian_prior',
      childId: 'bayesian_prior',
      type: 'subject',
      creatorId: 'AlexeiAndreev',
      createdAt: '2016-08-02 17:17:23',
      level: '1',
      isStrong: 'true',
      everPublished: 'true'
    }
  ],
  lenses: [],
  lensParentId: '',
  pathPages: [],
  learnMoreTaughtMap: {},
  learnMoreCoveredMap: {},
  learnMoreRequiredMap: {},
  editHistory: {},
  domainSubmissions: {},
  answers: [],
  answerCount: '0',
  commentCount: '0',
  newCommentCount: '0',
  linkedMarkCount: '0',
  changeLogs: [
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '18134',
      pageId: 'bayesian_prior',
      userId: 'AlexeiAndreev',
      edit: '0',
      type: 'deleteChild',
      createdAt: '2016-08-02 17:20:13',
      auxPageId: 'extraordinary_claims',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '18132',
      pageId: 'bayesian_prior',
      userId: 'AlexeiAndreev',
      edit: '0',
      type: 'newTeacher',
      createdAt: '2016-08-02 17:17:24',
      auxPageId: 'bayesian_prior',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '18133',
      pageId: 'bayesian_prior',
      userId: 'AlexeiAndreev',
      edit: '0',
      type: 'newSubject',
      createdAt: '2016-08-02 17:17:24',
      auxPageId: 'bayesian_prior',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8328',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '5',
      type: 'newEdit',
      createdAt: '2016-03-04 06:11:28',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8327',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '4',
      type: 'newEdit',
      createdAt: '2016-03-04 06:08:48',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8326',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '3',
      type: 'newEdit',
      createdAt: '2016-03-04 06:08:29',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8325',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '2',
      type: 'newEdit',
      createdAt: '2016-03-04 06:05:35',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8305',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '1',
      type: 'newChild',
      createdAt: '2016-03-04 04:38:05',
      auxPageId: 'extraordinary_claims',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8302',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '1',
      type: 'newEdit',
      createdAt: '2016-03-04 04:37:50',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8299',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newTag',
      createdAt: '2016-03-04 04:30:07',
      auxPageId: 'stub_meta_tag',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8297',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newRequirement',
      createdAt: '2016-03-04 04:30:02',
      auxPageId: 'bayes_rule',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '8295',
      pageId: 'bayesian_prior',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newParent',
      createdAt: '2016-03-04 04:28:59',
      auxPageId: 'bayes_reasoning',
      oldSettingsValue: '',
      newSettingsValue: ''
    }
  ],
  feedSubmissions: [],
  searchStrings: {},
  hasChildren: 'false',
  hasParents: 'true',
  redAliases: {},
  improvementTagIds: [],
  nonMetaTagIds: [],
  todos: [],
  slowDownMap: 'null',
  speedUpMap: 'null',
  arcPageIds: 'null',
  contentRequests: {
    improveStub: {
      likeableId: '3319',
      likeableType: 'contentRequest',
      myLikeValue: '0',
      likeCount: '1',
      dislikeCount: '0',
      likeScore: '1',
      individualLikes: [],
      id: '27',
      pageId: 'bayesian_prior',
      requestType: 'improveStub',
      createdAt: '2016-08-04 14:29:08'
    }
  }
}