{ localUrl: '../page/intension_extension.html', arbitalUrl: 'https://arbital.com/p/intension_extension', rawJsonUrl: '../raw/10b.json', likeableId: '13', likeableType: 'page', myLikeValue: '0', likeCount: '2', dislikeCount: '0', likeScore: '2', individualLikes: [ 'TanyaIvanova', 'EliezerYudkowsky' ], pageId: 'intension_extension', edit: '3', editSummary: '', prevEdit: '2', currentEdit: '3', wasPublished: 'true', type: 'wiki', title: 'Intension vs. extension', clickbait: '"Red is a light with a wavelength of 700 nm" vs. "Look at this red apple, red car, and red cup."', textLength: '1265', alias: 'intension_extension', externalUrl: '', sortChildrenBy: 'likes', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'AlexeiAndreev', editCreatedAt: '2015-12-16 17:20:57', pageCreatorId: 'EliezerYudkowsky', pageCreatedAt: '2015-07-15 23:06:43', seeDomainId: '0', editDomainId: '123', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'true', isResolved: 'false', snapshotText: '', anchorContext: '', anchorText: '', anchorOffset: '0', mergedInto: '', isDeleted: 'false', viewCount: '49', text: 'To give an "intensional definition" is to define a word or phrase in terms of other words, as a dictionary does. To give an "extensional definition" is to point to examples, as adults do when teaching children. The preceding sentence gives an intensional definition of "extensional definition", which makes it an extensional example of "intensional definition". See http://lesswrong.com/lw/nh/extensions_and_intensions/\n\nIn the context of AI, an "intensional concept" is the code or statistical pattern that executes to determine whether something is a member of the concept, while the "extension" is the set of things that are thus determined to belong to the concept. The intensional concept "test: does 2 evenly divide x?" recognizes the even numbers 0, 2, 4, 6... as its extension.\n\nGiven the modern level of visual recognition technology, a neural network that tries to classify cat photos vs. non-cat photos would have some cat photos in its extension, but its extension would almost certainly also exclude many things we think of as 'cat photos' and include many photos that are not of cats.
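\n\nAs a minimal sketch of the distinction (assuming Python; the helper names is_even and extension are illustrative, not from this page), the intension is an executable membership test and the extension is whatever that test accepts over a given domain:\n\n    def is_even(x):\n        # Intension: the executable test "does 2 evenly divide x?"\n        return x % 2 == 0\n\n    def extension(intension, domain):\n        # Extension: the set of domain elements the intension accepts.\n        return {x for x in domain if intension(x)}\n\n    print(extension(is_even, range(10)))  # -> {0, 2, 4, 6, 8}\n\nFor the cat-photo classifier the same reading applies, just with a far more complicated membership test.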
The intensional concept would be the classifier network itself - its weights, propagation rules, and so on.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '3', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '2016-02-16 03:54:46', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'EliezerYudkowsky', 'AlexeiAndreev' ], childIds: [], parentIds: [ 'epistemology' ], commentIds: [], questionIds: [], tagIds: [ 'stub_meta_tag' ], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '3949', pageId: 'intension_extension', userId: 'AlexeiAndreev', edit: '0', type: 'newAlias', createdAt: '2015-12-16 17:20:57', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '3950', pageId: 'intension_extension', userId: 'AlexeiAndreev', edit: '3', type: 'newEdit', createdAt: '2015-12-16 17:20:57', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '1098', pageId: 'intension_extension', userId: 'AlexeiAndreev', edit: '1', type: 'newUsedAsTag', createdAt: '2015-10-28 03:47:09', auxPageId: 'stub_meta_tag', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '252', pageId: 'intension_extension', userId: 'AlexeiAndreev', edit: '1', type: 'newParent', createdAt: '2015-10-28 03:46:51', auxPageId: 'epistemology', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '1434', pageId: 'intension_extension', userId: 'EliezerYudkowsky', edit: '2', type: 'newEdit', createdAt: '2015-07-15 23:11:14', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '1433', pageId: 'intension_extension', userId: 'EliezerYudkowsky', edit: '1', type: 'newEdit', createdAt: '2015-07-15 23:06:43', auxPageId: '', oldSettingsValue: '', 
newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }