{ localUrl: '../page/89m.html', arbitalUrl: 'https://arbital.com/p/89m', rawJsonUrl: '../raw/89m.json', likeableId: '0', likeableType: 'page', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], pageId: '89m', edit: '1', editSummary: '', prevEdit: '0', currentEdit: '1', wasPublished: 'true', type: 'comment', title: '"You could argue that a superintelligence would ..."', clickbait: '', textLength: '693', alias: '89m', externalUrl: '', sortChildrenBy: 'recentFirst', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'RyanCarey2', editCreatedAt: '2017-04-25 05:56:09', pageCreatorId: 'RyanCarey2', pageCreatedAt: '2017-04-25 05:56:09', seeDomainId: '0', editDomainId: '2142', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'false', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '281', text: 'You could argue that a superintelligence would be efficient at all tasks as follows:\n\nAssume that:\n1. An AI will not knowingly be biased (if it knew it had a bias, it would correct it).\n2. Predicting the residual error of one's predictions is a task that superintelligences are definitionally better at than humans.\n\nThen: superintelligences are efficient at all tasks.\n\nThe proof is by contradiction. Suppose a superintelligence has some residual error in some task that humans can predict. Then, by (2), the superintelligence can also predict that residual error, i.e. it knows it is biased. But (1) asserts that a superintelligence will not knowingly be biased: it would correct the error, contradicting the assumption that the error persists. So a superintelligence must be efficient at every task.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '1', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'RyanCarey2' ], childIds: [], parentIds: [ 'efficiency' ], commentIds: [], questionIds: [], tagIds: [], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '22497', pageId: '89m', userId: 'RyanCarey2', edit: '1', type: 'newEdit', createdAt: '2017-04-25 05:56:09', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, 
hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }