{ localUrl: '../page/991.html', arbitalUrl: 'https://arbital.com/p/991', rawJsonUrl: '../raw/991.json', likeableId: '0', likeableType: 'page', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], pageId: '991', edit: '5', editSummary: '', prevEdit: '4', currentEdit: '5', wasPublished: 'true', type: 'comment', title: '"I wrote this out for myself in attempt to fully..."', clickbait: '', textLength: '1303', alias: '991', externalUrl: '', sortChildrenBy: 'recentFirst', hasVote: 'false', voteType: '', votesAnonymous: 'false', editCreatorId: 'ViktorRiabtsev', editCreatedAt: '2018-10-11 14:46:55', pageCreatorId: 'ViktorRiabtsev', pageCreatedAt: '2018-10-10 18:47:43', seeDomainId: '0', editDomainId: '3188', submitToDomainId: '0', isAutosave: 'false', isSnapshot: 'false', isLiveEdit: 'true', isMinorEdit: 'false', indirectTeacher: 'false', todoCount: '0', isEditorComment: 'false', isApprovedComment: 'false', isResolved: 'false', snapshotText: '', anchorContext: '"Well, I believe it somewhere in the range of four and a half times as much as I did previously," says Aristotle\\. "But that part where you're plugging in numbers like 9\\.8 and calculations like the square of the time strike me as kinda complicated\\. Like, if I'm allowed to plug in numbers that precise, and do things like square them, there must be hundreds of different theories I could make which would be that complicated\\. By the quantitative form of Occam's Razor, we need to penalize the prior probability of your theory for its algorithmic complexity\\. One observation with a likelihood ratio of 4\\.5 : 1 isn't enough to support all that complexity\\. I'm not going to believe something that complicated because I see a stopwatch showing '3' just that one time\\! I need to see more objects dropped from various different heights and verify that the times are what you say they should be\\. 
If I say the prior complexity of your theory is, say, 20 bits, then 9 more observations like this would do it\\. Of course, I expect you've already made more observations than that in private, but it only becomes part of the public knowledge of humankind after someone replicates it\\."', anchorText: ' I'm not going to believe something that complicated because I see a stopwatch showing '3' just that one time\\! I need to see more objects dropped from various different heights and verify that the times are what you say they should be\\. If I say the prior complexity of your theory is, say, 20 bits, then 9 more observations like this would do it\\.', anchorOffset: '664', mergedInto: '', isDeleted: 'false', viewCount: '1589', text: 'I wrote this out for myself in an attempt to fully grasp this and maybe someone else might find it useful:\n\nYou have two theories, A and B. A is more complex than B, but has sharper/more precise predictions for its observables.\ni.e. given a test, where it's either +-ve or -ve (true or false), then we necessitate that P(+ | A) > P(+ | B). \n\nSay that P(+ | A) : P(+ | B) = 10 : 1, a favorable likelihood ratio. \n\nThen each successful +-ve test gives 10 : 1 odds for theory A over theory B.\nYou can penalize A initially for algorithmic complexity and estimate/assign it 1 : 10^5 odds for it; i.e. you think it is borderline absurd.\n\nBut if you get 5 consecutive +-ve tests, then your posterior odds become 1 : 1; meaning your initial odds estimate was grossly wrong. \nIn fact, given 5 more consecutive +-ve tests, it is theory B which should at this point be considered absurd.\n\n\nOf course in real problems, the favorable likelihood ratio could be as low as 1.1 : 1, and your prior odds are not as ridiculous; maybe 1 : 100 against. \nThen you'd need about 50 updates before you get posterior odds of about 1 : 1. 
You then seriously question the validity of your prior odds.\nAfter another 50 updates, you're essentially fully convinced that the new theory contestant is much better than the original theory.', metaText: '', isTextLoaded: 'true', isSubscribedToDiscussion: 'false', isSubscribedToUser: 'false', isSubscribedAsMaintainer: 'false', discussionSubscriberCount: '1', maintainerCount: '1', userSubscriberCount: '0', lastVisit: '', hasDraft: 'false', votes: [], voteSummary: 'null', muVoteSummary: '0', voteScaling: '0', currentUserVote: '-2', voteCount: '0', lockedVoteType: '', maxEditEver: '0', redLinkCount: '0', lockedBy: '', lockedUntil: '', nextPageId: '', prevPageId: '', usedAsMastery: 'false', proposalEditNum: '0', permissions: { edit: { has: 'false', reason: 'You don't have domain permission to edit this page' }, proposeEdit: { has: 'true', reason: '' }, delete: { has: 'false', reason: 'You don't have domain permission to delete this page' }, comment: { has: 'false', reason: 'You can't comment in this domain because you are not a member' }, proposeComment: { has: 'true', reason: '' } }, summaries: {}, creatorIds: [ 'ViktorRiabtsev' ], childIds: [], parentIds: [ 'bayes_science_virtues' ], commentIds: [], questionIds: [], tagIds: [], relatedIds: [], markIds: [], explanations: [], learnMore: [], requirements: [], subjects: [], lenses: [], lensParentId: '', pathPages: [], learnMoreTaughtMap: {}, learnMoreCoveredMap: {}, learnMoreRequiredMap: {}, editHistory: {}, domainSubmissions: {}, answers: [], answerCount: '0', commentCount: '0', newCommentCount: '0', linkedMarkCount: '0', changeLogs: [ { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '23105', pageId: '991', userId: 'ViktorRiabtsev', edit: '5', type: 'newEdit', createdAt: '2018-10-11 14:46:55', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: 
'0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '23104', pageId: '991', userId: 'ViktorRiabtsev', edit: '4', type: 'newEdit', createdAt: '2018-10-10 18:49:16', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '23103', pageId: '991', userId: 'ViktorRiabtsev', edit: '3', type: 'newEdit', createdAt: '2018-10-10 18:48:54', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '23102', pageId: '991', userId: 'ViktorRiabtsev', edit: '2', type: 'newEdit', createdAt: '2018-10-10 18:48:22', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' }, { likeableId: '0', likeableType: 'changeLog', myLikeValue: '0', likeCount: '0', dislikeCount: '0', likeScore: '0', individualLikes: [], id: '23101', pageId: '991', userId: 'ViktorRiabtsev', edit: '1', type: 'newEdit', createdAt: '2018-10-10 18:47:43', auxPageId: '', oldSettingsValue: '', newSettingsValue: '' } ], feedSubmissions: [], searchStrings: {}, hasChildren: 'false', hasParents: 'true', redAliases: {}, improvementTagIds: [], nonMetaTagIds: [], todos: [], slowDownMap: 'null', speedUpMap: 'null', arcPageIds: 'null', contentRequests: {} }