{
localUrl: '../page/3nh.html',
arbitalUrl: 'https://arbital.com/p/3nh',
rawJsonUrl: '../raw/3nh.json',
likeableId: '0',
likeableType: 'page',
myLikeValue: '0',
likeCount: '0',
dislikeCount: '0',
likeScore: '0',
individualLikes: [],
pageId: '3nh',
edit: '1',
editSummary: '',
prevEdit: '0',
currentEdit: '1',
wasPublished: 'true',
type: 'comment',
title: '"Darn it, I wanted to use th..."',
clickbait: '',
textLength: '606',
alias: '3nh',
externalUrl: '',
sortChildrenBy: 'recentFirst',
hasVote: 'false',
voteType: '',
votesAnonymous: 'false',
editCreatorId: 'EliezerYudkowsky',
editCreatedAt: '2016-05-16 06:41:46',
pageCreatorId: 'EliezerYudkowsky',
pageCreatedAt: '2016-05-16 06:41:46',
seeDomainId: '0',
editDomainId: '123',
submitToDomainId: '0',
isAutosave: 'false',
isSnapshot: 'false',
isLiveEdit: 'true',
isMinorEdit: 'false',
indirectTeacher: 'false',
todoCount: '0',
isEditorComment: 'false',
isApprovedComment: 'true',
isResolved: 'false',
snapshotText: '',
anchorContext: '',
anchorText: '',
anchorOffset: '0',
mergedInto: '',
isDeleted: 'false',
viewCount: '50',
text: 'Darn it, I wanted to use this term to distinguish "not-explicitly-consequentialistically optimizing for $Y$ still optimizes for $X$ when $X$ is being varied and is causally relevant to $Y$" from "having an explicit model of $X$ being relevant to $Y$ and therefore explicitly forming goals about $X$ and searching for strategies that affect $X.$" (E.g., natural selection does implicit consequentialism, humans do explicit consequentialism.) I'm not sure if I can think of an equally good replacement term for the thing I wanted to say. Would "proxy consequentialism" work for the thing you wanted to say?',
metaText: '',
isTextLoaded: 'true',
isSubscribedToDiscussion: 'false',
isSubscribedToUser: 'false',
isSubscribedAsMaintainer: 'false',
discussionSubscriberCount: '1',
maintainerCount: '1',
userSubscriberCount: '0',
lastVisit: '',
hasDraft: 'false',
votes: [],
voteSummary: 'null',
muVoteSummary: '0',
voteScaling: '0',
currentUserVote: '-2',
voteCount: '0',
lockedVoteType: '',
maxEditEver: '0',
redLinkCount: '0',
lockedBy: '',
lockedUntil: '',
nextPageId: '',
prevPageId: '',
usedAsMastery: 'false',
proposalEditNum: '0',
permissions: {
edit: {
has: 'false',
reason: 'You don\'t have domain permission to edit this page'
},
proposeEdit: {
has: 'true',
reason: ''
},
delete: {
has: 'false',
reason: 'You don\'t have domain permission to delete this page'
},
comment: {
has: 'false',
reason: 'You can\'t comment in this domain because you are not a member'
},
proposeComment: {
has: 'true',
reason: ''
}
},
summaries: {},
creatorIds: [
'EliezerYudkowsky'
],
childIds: [],
parentIds: [
'implicit_consequentialism'
],
commentIds: [],
questionIds: [],
tagIds: [],
relatedIds: [],
markIds: [],
explanations: [],
learnMore: [],
requirements: [],
subjects: [],
lenses: [],
lensParentId: '',
pathPages: [],
learnMoreTaughtMap: {},
learnMoreCoveredMap: {},
learnMoreRequiredMap: {},
editHistory: {},
domainSubmissions: {},
answers: [],
answerCount: '0',
commentCount: '0',
newCommentCount: '0',
linkedMarkCount: '0',
changeLogs: [
{
likeableId: '0',
likeableType: 'changeLog',
myLikeValue: '0',
likeCount: '0',
dislikeCount: '0',
likeScore: '0',
individualLikes: [],
id: '10476',
pageId: '3nh',
userId: 'EliezerYudkowsky',
edit: '1',
type: 'newEdit',
createdAt: '2016-05-16 06:41:46',
auxPageId: '',
oldSettingsValue: '',
newSettingsValue: ''
},
{
likeableId: '0',
likeableType: 'changeLog',
myLikeValue: '0',
likeCount: '0',
dislikeCount: '0',
likeScore: '0',
individualLikes: [],
id: '10475',
pageId: '3nh',
userId: 'EliezerYudkowsky',
edit: '1',
type: 'newParent',
createdAt: '2016-05-16 06:38:47',
auxPageId: 'implicit_consequentialism',
oldSettingsValue: '',
newSettingsValue: ''
}
],
feedSubmissions: [],
searchStrings: {},
hasChildren: 'false',
hasParents: 'true',
redAliases: {},
improvementTagIds: [],
nonMetaTagIds: [],
todos: [],
slowDownMap: 'null',
speedUpMap: 'null',
arcPageIds: 'null',
contentRequests: {}
}