{
  localUrl: '../page/KANSI.html',
  arbitalUrl: 'https://arbital.com/p/KANSI',
  rawJsonUrl: '../raw/1fy.json',
  likeableId: 'BenjaminLevy',
  likeableType: 'page',
  myLikeValue: '0',
  likeCount: '3',
  dislikeCount: '0',
  likeScore: '3',
  individualLikes: [
    'AndrewMcKnight',
    'JosephRatliff',
    'RonnyFernandez'
  ],
  pageId: 'KANSI',
  edit: '3',
  editSummary: '',
  prevEdit: '2',
  currentEdit: '3',
  wasPublished: 'true',
  type: 'wiki',
  title: 'Known-algorithm non-self-improving agent',
  clickbait: 'Possible advanced AIs that aren\'t self-modifying, aren\'t self-improving, and where we know and understand all the component algorithms.',
  textLength: '2922',
  alias: 'KANSI',
  externalUrl: '',
  sortChildrenBy: 'likes',
  hasVote: 'false',
  voteType: '',
  votesAnonymous: 'false',
  editCreatorId: 'EliezerYudkowsky',
  editCreatedAt: '2015-12-28 21:43:03',
  pageCreatorId: 'EliezerYudkowsky',
  pageCreatedAt: '2015-12-28 20:56:47',
  seeDomainId: '0',
  editDomainId: 'EliezerYudkowsky',
  submitToDomainId: '0',
  isAutosave: 'false',
  isSnapshot: 'false',
  isLiveEdit: 'true',
  isMinorEdit: 'false',
  indirectTeacher: 'false',
  todoCount: '1',
  isEditorComment: 'false',
  isApprovedComment: 'true',
  isResolved: 'false',
  snapshotText: '',
  anchorContext: '',
  anchorText: '',
  anchorOffset: '0',
  mergedInto: '',
  isDeleted: 'false',
  viewCount: '1422',
  text: '"Known-algorithm non-self-improving" (KANSI) is a strategic scenario and class of possibly-attainable AI designs, where the first [6y pivotal] [2c powerful AI] has been constructed out of known, human-understood algorithms and is not engaging in extensive self-modification.  Its advantage might be achieved by, e.g., being run on a very large cluster of computers.  If you could build an AI that was capable of cracking protein folding and building nanotechnology, by running correctly structured algorithms akin to deep learning on Google's or Amazon's computing cluster, and the builders were sufficiently paranoid/sensible to have people continuously monitoring this AI's processes and all the problems it was trying to solve and *not* having this AI engage in self-modification or self-improvement, this would fall into the KANSI class of scenarios.  This would imply that huge classes of problems in [1fx reflective stability], [5c ontology identification], limiting potentially dangerous capabilities, etcetera, would be much simpler.\n\nRestricting 'good' or approved AI development to KANSI designs would mean deliberately foregoing whatever capability gains might be possible through self-improvement.  It's not known whether a KANSI AI could be *first* to some pivotal level of capability.  This would depend on unknown background settings about how much capability can be gained, at what stage, by self-modification.  Depending on these background variables, making a KANSI AI be first to a capability threshold might or might not be something that could be accomplished by any reasonable level of effort and coordination.  This is one reason among several why [15w MIRI] does not, e.g., restrict its attention to KANSI designs.\n\nJust intending to build a non-self-improving AI out of known algorithms is insufficient to ensure KANSI as a property; this might require further solutions along the lines of [45].  E.g., humans can't modify their own brain functions, but because we're [ general consequentialists] and we [1fx don't always think the way we want to think], we created quite simple innovations like, e.g., calculators, out of environmental objects in a world that didn't have any built-in calculators, so that we could think about arithmetic in a different way than we did by default.  A KANSI design with a large divergence between how it thinks and how it wants to think might behave similarly, or require constant supervision to detect *most* cases of the AI starting to behave similarly - and then some cases might slip through the cracks.  Since our present study and understanding of reflective stability is very primitive, we're plausibly still in the field of things we should be studying even if we want to build a KANSI agent, just to have the KANSI agent not be too *wildly* divergent in distance between how it thinks about X, and how it would prefer to think about X if given the choice.',
  metaText: '',
  isTextLoaded: 'true',
  isSubscribedToDiscussion: 'false',
  isSubscribedToUser: 'false',
  isSubscribedAsMaintainer: 'false',
  discussionSubscriberCount: '4',
  maintainerCount: '1',
  userSubscriberCount: '0',
  lastVisit: '2016-02-21 14:22:30',
  hasDraft: 'false',
  votes: [],
  voteSummary: 'null',
  muVoteSummary: '0',
  voteScaling: '0',
  currentUserVote: '-2',
  voteCount: '0',
  lockedVoteType: '',
  maxEditEver: '0',
  redLinkCount: '0',
  lockedBy: '',
  lockedUntil: '',
  nextPageId: '',
  prevPageId: '',
  usedAsMastery: 'false',
  proposalEditNum: '0',
  permissions: {
    edit: {
      has: 'false',
      reason: 'You don\'t have domain permission to edit this page'
    },
    proposeEdit: {
      has: 'true',
      reason: ''
    },
    delete: {
      has: 'false',
      reason: 'You don\'t have domain permission to delete this page'
    },
    comment: {
      has: 'false',
      reason: 'You can\'t comment in this domain because you are not a member'
    },
    proposeComment: {
      has: 'true',
      reason: ''
    }
  },
  summaries: {},
  creatorIds: [
    'EliezerYudkowsky'
  ],
  childIds: [],
  parentIds: [
    'AGI_typology'
  ],
  commentIds: [
    '1gp',
    '7pz'
  ],
  questionIds: [],
  tagIds: [
    'work_in_progress_meta_tag'
  ],
  relatedIds: [
    'behaviorist'
  ],
  markIds: [],
  explanations: [],
  learnMore: [],
  requirements: [],
  subjects: [],
  lenses: [],
  lensParentId: '',
  pathPages: [],
  learnMoreTaughtMap: {},
  learnMoreCoveredMap: {},
  learnMoreRequiredMap: {},
  editHistory: {},
  domainSubmissions: {},
  answers: [],
  answerCount: '0',
  commentCount: '0',
  newCommentCount: '0',
  linkedMarkCount: '0',
  changeLogs: [
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4617',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '3',
      type: 'newUsedAsTag',
      createdAt: '2015-12-28 22:32:11',
      auxPageId: 'behaviorist',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4569',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '3',
      type: 'newEdit',
      createdAt: '2015-12-28 21:43:03',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4568',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '2',
      type: 'newTag',
      createdAt: '2015-12-28 21:42:55',
      auxPageId: 'work_in_progress_meta_tag',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4566',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'deleteTag',
      createdAt: '2015-12-28 21:42:51',
      auxPageId: 'stub_meta_tag',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4564',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'deleteTag',
      createdAt: '2015-12-28 21:42:45',
      auxPageId: 'task_agi',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4562',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '2',
      type: 'newParent',
      createdAt: '2015-12-28 21:42:39',
      auxPageId: 'AGI_typology',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4559',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'deleteParent',
      createdAt: '2015-12-28 21:42:24',
      auxPageId: 'ai_alignment',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4528',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '2',
      type: 'newEdit',
      createdAt: '2015-12-28 20:57:23',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4527',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '1',
      type: 'newEdit',
      createdAt: '2015-12-28 20:56:47',
      auxPageId: '',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4513',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newTag',
      createdAt: '2015-12-28 20:03:38',
      auxPageId: 'stub_meta_tag',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4511',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newTag',
      createdAt: '2015-12-28 20:03:21',
      auxPageId: 'task_agi',
      oldSettingsValue: '',
      newSettingsValue: ''
    },
    {
      likeableId: '0',
      likeableType: 'changeLog',
      myLikeValue: '0',
      likeCount: '0',
      dislikeCount: '0',
      likeScore: '0',
      individualLikes: [],
      id: '4509',
      pageId: 'KANSI',
      userId: 'EliezerYudkowsky',
      edit: '0',
      type: 'newParent',
      createdAt: '2015-12-28 20:02:59',
      auxPageId: 'ai_alignment',
      oldSettingsValue: '',
      newSettingsValue: ''
    }
  ],
  feedSubmissions: [],
  searchStrings: {},
  hasChildren: 'false',
  hasParents: 'true',
  redAliases: {},
  improvementTagIds: [],
  nonMetaTagIds: [],
  todos: [],
  slowDownMap: 'null',
  speedUpMap: 'null',
  arcPageIds: 'null',
  contentRequests: {}
}