{
  localUrl: '../page/cognitive_steganography.html',
  arbitalUrl: 'https://arbital.com/p/cognitive_steganography',
  rawJsonUrl: '../raw/3cq.json',
  likeableId: '2466',
  likeableType: 'page',
  myLikeValue: '0',
  likeCount: '2',
  dislikeCount: '0',
  likeScore: '2',
  individualLikes: [
    'NateSoares',
    'EricRogstad'
  ],
  pageId: 'cognitive_steganography',
  edit: '1',
  editSummary: '',
  prevEdit: '0',
  currentEdit: '1',
  wasPublished: 'true',
  type: 'wiki',
  title: 'Cognitive steganography',
  clickbait: 'Disaligned AIs that are modeling human psychology and trying to deceive their programmers will want to hide their internal thought processes from them.',
  textLength: '1560',
  alias: 'cognitive_steganography',
  externalUrl: '',
  sortChildrenBy: 'likes',
  hasVote: 'false',
  voteType: '',
  votesAnonymous: 'false',
  editCreatorId: 'EliezerYudkowsky',
  editCreatedAt: '2016-04-28 23:15:34',
  pageCreatorId: 'EliezerYudkowsky',
  pageCreatedAt: '2016-04-28 23:15:34',
  seeDomainId: '0',
  editDomainId: 'EliezerYudkowsky',
  submitToDomainId: '0',
  isAutosave: 'false',
  isSnapshot: 'false',
  isLiveEdit: 'true',
  isMinorEdit: 'false',
  indirectTeacher: 'false',
  todoCount: '0',
  isEditorComment: 'false',
  isApprovedComment: 'true',
  isResolved: 'false',
  snapshotText: '',
  anchorContext: '',
  anchorText: '',
  anchorOffset: '0',
  mergedInto: '',
  isDeleted: 'false',
  viewCount: '252',
  text: 'Cognitive steganography is an [10g instrumental strategy] for AGIs that are already trying to [10f deceive their programmers] for any reason (e.g., because the AGI is disaligned and does not want the programmers to realize this and edit the AGI's utility function).  Whatever tools the programmers have for monitoring the AGI's thoughts, the AGI will not want those tools to helpfully print out all the AGI's plans for deception.  This could mean, e.g., trying to have the calculations occur somewhere else or in a weird side-mode that the tools can't read; or making the thoughts look like they're about something else; or burying any warning signs under [2qq an avalanche of false alarms too long for the programmers to manually check all of them]; or making the visible portions of the thoughts look innocuous.  ("Oh, it's trying to figure out what behavior we want to see!  That's good, right?")\n\nHopefully, the intention to figure out and implement cognitive steganography would occur before any cognitive steganography had actually been implemented.  This means that perhaps the most critical AI thought-monitoring tool, especially for monitoring an AGI that is or might be able to model human psychology, is the one that tries to detect a forming intention to defeat the thought-monitoring tool.  Similarly, so long as the AI's state is perfectly [reproducible], one could in principle go back and use later, more sophisticated thought-monitoring tools to try to detect whether any intention toward cognitive steganography had occurred months earlier.',
  metaText: '',
  isTextLoaded: 'true',
  isSubscribedToDiscussion: 'false',
  isSubscribedToUser: 'false',
  isSubscribedAsMaintainer: 'false',
  discussionSubscriberCount: '2',
  maintainerCount: '1',
  userSubscriberCount: '0',
  lastVisit: '',
  hasDraft: 'false',
  votes: [],
  voteSummary: 'null',
  muVoteSummary: '0',
  voteScaling: '0',
  currentUserVote: '-2',
  voteCount: '0',
  lockedVoteType: '',
  maxEditEver: '0',
  redLinkCount: '0',
  lockedBy: '',
  lockedUntil: '',
  nextPageId: '',
  prevPageId: '',
  usedAsMastery: 'false',
  proposalEditNum: '0',
  permissions: {
    edit: {
      has: 'false',
      reason: 'You don't have domain permission to edit this page'
    },
    proposeEdit: {
      has: 'true',
      reason: ''
    },
    delete: {
      has: 'false',
      reason: 'You don't have domain permission to delete this page'
    },
    comment: {
      has: 'false',
      reason: 'You can't comment in this domain because you are not a member'
    },
    proposeComment: {
      has: 'true',
      reason: ''
    }
  },
  summaries: {},
  creatorIds: [
    'EliezerYudkowsky'
  ],
  childIds: [],
  parentIds: [
    'programmer_deception'
  ],
  commentIds: [],
  questionIds: [],
  tagIds: [
    'stub_meta_tag'
  ],
  relatedIds: [],
  markIds: [],
  explanations: [],
  learnMore: [],
  requirements: [],
  subjects: [],
  lenses: [],
  lensParentId: '',
  pathPages: [],
  learnMoreTaughtMap: {},
  learnMoreCoveredMap: {},
  learnMoreRequiredMap: {},
  editHistory: {},
  domainSubmissions: {},
  answers: [],
  answerCount: '0',
  commentCount: '0',
  newCommentCount: '0',
  linkedMarkCount: '0',
  changeLogs: [
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '9461',
      pageId: 'cognitive_steganography',
      userId: 'EliezerYudkowsky',
      edit: '1',
      type: 'newEdit',
      createdAt: '2016-04-28 23:15:34',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '9458',
      pageId: 'cognitive_steganography',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newTag',
      createdAt: '2016-04-28 23:06:34',
      auxPageId: 'stub_meta_tag',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '9457',
      pageId: 'cognitive_steganography',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newParent',
      createdAt: '2016-04-28 23:05:51',
      auxPageId: 'programmer_deception',
      oldSettingsValue: '',
      newSettingsValue: ''
    }
  ],
  feedSubmissions: [],
  searchStrings: {},
  hasChildren: 'false',
  hasParents: 'true',
  redAliases: {},
  improvementTagIds: [],
  nonMetaTagIds: [],
  todos: [],
  slowDownMap: 'null',
  speedUpMap: 'null',
  arcPageIds: 'null',
  contentRequests: {}
}