Revert "Make probability accurate by condition probability on the value."

This reverts commit 3fa1867340.
master
Mike Bryant 4 years ago
parent a5194879d0
commit 8f49882245

@@ -9,6 +9,13 @@ const PATTERN = {
SMALL_SPIKE: 3,
};
const PATTERN_COUNTS = {
[PATTERN.FLUCTUATING]: 56,
[PATTERN.LARGE_SPIKE]: 7,
[PATTERN.DECREASING]: 1,
[PATTERN.SMALL_SPIKE]: 8,
};
const PROBABILITY_MATRIX = {
[PATTERN.FLUCTUATING]: {
[PATTERN.FLUCTUATING]: 0.20,
@@ -50,42 +57,10 @@ function maximum_rate_from_given_and_base(given_price, buy_price) {
return RATE_MULTIPLIER * (given_price + 0.00001) / buy_price;
}
function rate_range_from_given_and_base(given_price, buy_price) {
return [
minimum_rate_from_given_and_base(given_price, buy_price),
maximum_rate_from_given_and_base(given_price, buy_price)
];
}
function get_price(rate, basePrice) {
return intceil(rate * basePrice / RATE_MULTIPLIER);
}
function* multiply_generator_probability(generator, probability) {
for (const it of generator) {
yield {...it, probability: it.probability * probability};
}
}
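// Illustrative sketch of multiply_generator_probability with a made-up generator; the
// possibilities and the 1/4 prior below are hypothetical, not values used elsewhere in
// this file.
function* example_possibilities() {
  yield {pattern_number: 0, probability: 0.5};
  yield {pattern_number: 1, probability: 0.5};
}
// Array.from(multiply_generator_probability(example_possibilities(), 1 / 4))
//   -> [{pattern_number: 0, probability: 0.125}, {pattern_number: 1, probability: 0.125}]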
function range_length(range) {
return range[1] - range[0];
}
function range_intersect(range1, range2) {
if (range1[0] > range2[1] || range1[1] < range2[0]) {
return null;
}
return [Math.max(range1[0], range2[0]), Math.min(range1[1], range2[1])];
}
function range_intersect_length(range1, range2) {
if (range1[0] > range2[1] || range1[1] < range2[0]) {
return 0;
}
return range_length(range_intersect(range1, range2));
}
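// Illustrative sketch of the range helpers above, with made-up ranges:
//   range_intersect([0, 10], [5, 20])         -> [5, 10]
//   range_intersect_length([0, 10], [5, 8])   -> 3
//   range_intersect_length([0, 10], [20, 30]) -> 0
// The ratio range_intersect_length(rate_range, real_rate_range) / range_length(rate_range)
// used further down is therefore the fraction of an allowed rate window that stays
// consistent with an observed price (3 / 10 = 0.3 in the second example).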
/*
* This corresponds to the code:
* for (int i = start; i < start + length; i++)
@@ -94,9 +69,8 @@ function range_intersect_length(range1, range2) {
* intceil(randfloat(rate_min / RATE_MULTIPLIER, rate_max / RATE_MULTIPLIER) * basePrice);
* }
*
* Would return the conditional probability given the given_prices, and modify
* the predicted_prices array.
* If the given_prices don't match, returns 0.
* Would modify the predicted_prices array.
* If the given_prices don't match, returns false; otherwise returns true.
*/
function generate_individual_random_price(
given_prices, predicted_prices, start, length, rate_min, rate_max) {
@@ -104,8 +78,6 @@ function generate_individual_random_price(
rate_max *= RATE_MULTIPLIER;
const buy_price = given_prices[0];
const rate_range = [rate_min, rate_max];
let prob = 1;
for (let i = start; i < start + length; i++) {
let min_pred = get_price(rate_min, buy_price);
@@ -113,14 +85,7 @@
if (!isNaN(given_prices[i])) {
if (given_prices[i] < min_pred - FUDGE_FACTOR || given_prices[i] > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return 0;
}
if (given_prices[i] >= min_pred || given_prices[i] <= max_pred) {
// A value inside the FUDGE_FACTOR range is ignored so that it doesn't force the probability to 0.
const real_rate_range =
rate_range_from_given_and_base(given_prices[i], buy_price);
prob *= range_intersect_length(rate_range, real_rate_range) /
range_length(rate_range);
return false;
}
min_pred = given_prices[i];
max_pred = given_prices[i];
@@ -131,144 +96,7 @@ function generate_individual_random_price(
max: max_pred,
});
}
return prob;
}
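// Illustrative sketch of the forward generation that generate_individual_random_price
// inverts, following the pseudocode quoted above; randfloat and the argument values in
// the example call are hypothetical stand-ins, while intceil is the helper defined
// earlier in this file.
function example_forward_individual_prices(basePrice, start, length, rate_min, rate_max) {
  const randfloat = (a, b) => a + Math.random() * (b - a); // assumed uniform helper
  const sellPrices = [];
  for (let i = start; i < start + length; i++) {
    // Each half-day draws an independent rate and rounds the price up.
    sellPrices[i] = intceil(randfloat(rate_min, rate_max) * basePrice);
  }
  return sellPrices;
}
// e.g. example_forward_individual_prices(100, 2, 3, 0.9, 1.4) fills indices 2-4 with
// prices roughly in [90, 140].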
/*
* Probability Density Function of rates.
* Since the PDF is continuous*, we approximate it by a discrete probability function:
* the value in range [(x - 0.5), (x + 0.5)) has a uniform probability
* prob[x - value_start];
*
* Note that we operate on all rates at the (* RATE_MULTIPLIER) scale.
*
* (*): Well, not really, since it only takes values that a "float" can represent in some form, but
* that space is too large to enumerate directly in JS.
*/
class PDF {
/*
* Initialize a PDF on the range [a, b]; a and b can be non-integers.
* If uniform is true, initialize the probability to be uniform; otherwise initialize an
* all-zero (invalid) PDF.
*/
constructor(a, b, uniform = true) {
this.value_start = Math.round(a);
this.value_end = Math.round(b);
const range = [a, b];
const total_length = range_length(range);
this.prob = Array(this.value_end - this.value_start + 1);
if (uniform) {
for (let i = 0; i < this.prob.length; i++) {
this.prob[i] =
range_intersect_length(this.range_of(i), range) / total_length;
}
}
}
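// Worked example with made-up bounds: new PDF(8500, 9000) has value_start = 8500,
// value_end = 9000 and 501 buckets. Bucket i covers [value_start + i - 0.5,
// value_start + i + 0.5); the two end buckets only half-overlap [a, b], so the uniform
// initialisation gives them 0.5 / 500 = 0.001 while every interior bucket gets
// 1 / 500 = 0.002.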
range_of(idx) {
// TODO: consider doing the "exclusive end" properly.
return [this.value_start + idx - 0.5, this.value_start + idx + 0.5 - 1e-9];
}
min_value() {
return this.value_start - 0.5;
}
max_value() {
return this.value_end + 0.5 - 1e-9;
}
normalize() {
const total_probability = this.prob.reduce((acc, it) => acc + it, 0);
for (let i = 0; i < this.prob.length; i++) {
this.prob[i] /= total_probability;
}
}
/*
* Limit the values to be in the range, and return the probability that the value was in this
* range.
*/
range_limit(range) {
let [start, end] = range;
start = Math.max(start, this.min_value());
end = Math.min(end, this.max_value());
if (start >= end) {
// Set this to invalid values
this.value_start = this.value_end = 0;
this.prob = [];
return 0;
}
let prob = 0;
const start_idx = Math.round(start) - this.value_start;
const end_idx = Math.round(end) - this.value_start;
for (let i = start_idx; i <= end_idx; i++) {
const bucket_prob = this.prob[i] * range_intersect_length(this.range_of(i), range);
this.prob[i] = bucket_prob;
prob += bucket_prob;
}
this.prob = this.prob.slice(start_idx, end_idx + 1);
this.value_start = Math.round(start);
this.value_end = Math.round(end);
this.normalize();
return prob;
}
/*
* Subtract the PDF by a uniform distribution in [rate_decay_min, rate_decay_max]
*
* For simplicity, we assume that rate_decay_min and rate_decay_max are both integers.
*/
decay(rate_decay_min, rate_decay_max) {
const ret = new PDF(
this.min_value() - rate_decay_max, this.max_value() - rate_decay_min, false);
/*
// O(n^2) naive algorithm for reference, which would be too slow.
for (let i = this.value_start; i <= this.value_end; i++) {
const unit_prob = this.prob[i - this.value_start] / (rate_decay_max - rate_decay_min) / 2;
for (let j = rate_decay_min; j < rate_decay_max; j++) {
// ([i - 0.5, i + 0.5] uniform) - ([j, j + 1] uniform)
// -> [i - j - 1.5, i + 0.5 - j] with a triangular PDF
// -> approximate by
// [i - j - 1.5, i - j - 0.5] uniform &
// [i - j - 0.5, i - j + 0.5] uniform
ret.prob[i - j - 1 - ret.value_start] += unit_prob; // Part A
ret.prob[i - j - ret.value_start] += unit_prob; // Part B
}
}
*/
// Transform to "CDF"
for (let i = 1; i < this.prob.length; i++) {
this.prob[i] += this.prob[i - 1];
}
// Return this.prob[l - this.value_start] + ... + this.prob[r - 1 - this.value_start];
// This assumes that this.prob has already been transformed to the "CDF".
const sum = (l, r) => {
l -= this.value_start;
r -= this.value_start;
if (l < 0) l = 0;
if (r > this.prob.length) r = this.prob.length;
if (l >= r) return 0;
return this.prob[r - 1] - (l == 0 ? 0 : this.prob[l - 1]);
};
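// Worked example with made-up numbers: if this.prob was [0.1, 0.2, 0.3, 0.4] before the
// transform above, it is now [0.1, 0.3, 0.6, 1.0], and
// sum(this.value_start + 1, this.value_start + 3) returns 0.6 - 0.1 = 0.5, i.e. the
// original 0.2 + 0.3, in O(1) instead of looping over the decay window.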
for (let x = 0; x < ret.prob.length; x++) {
// i - j - 1 - ret.value_start == x (Part A)
// -> i = x + j + 1 + ret.value_start, j in [rate_decay_min, rate_decay_max)
ret.prob[x] = sum(x + rate_decay_min + 1 + ret.value_start, x + rate_decay_max + 1 + ret.value_start);
// i - j - ret.value_start == x (Part B)
// -> i = x + j + ret.value_start, j in [rate_decay_min, rate_decay_max)
ret.prob[x] += sum(x + rate_decay_min + ret.value_start, x + rate_decay_max + ret.value_start);
}
this.prob = ret.prob;
this.value_start = ret.value_start;
this.value_end = ret.value_end;
this.normalize();
}
return true;
}
/*
@@ -280,38 +108,33 @@ class PDF {
* rate -= randfloat(rate_decay_min, rate_decay_max);
* }
*
* Would return the conditional probability given the given_prices, and modify
* the predicted_prices array.
* If the given_prices don't match, returns 0.
* Would modify the predicted_prices array.
* If the given_prices don't match, returns false; otherwise returns true.
*/
function generate_decreasing_random_price(
given_prices, predicted_prices, start, length, start_rate_min,
start_rate_max, rate_decay_min, rate_decay_max) {
start_rate_min *= RATE_MULTIPLIER;
start_rate_max *= RATE_MULTIPLIER;
given_prices, predicted_prices, start, length, rate_min,
rate_max, rate_decay_min, rate_decay_max) {
rate_min *= RATE_MULTIPLIER;
rate_max *= RATE_MULTIPLIER;
rate_decay_min *= RATE_MULTIPLIER;
rate_decay_max *= RATE_MULTIPLIER;
const buy_price = given_prices[0];
let rate_pdf = new PDF(start_rate_min, start_rate_max);
let prob = 1;
for (let i = start; i < start + length; i++) {
let min_pred = get_price(rate_pdf.min_value(), buy_price);
let max_pred = get_price(rate_pdf.max_value(), buy_price);
let min_pred = get_price(rate_min, buy_price);
let max_pred = get_price(rate_max, buy_price);
if (!isNaN(given_prices[i])) {
if (given_prices[i] < min_pred - FUDGE_FACTOR || given_prices[i] > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return 0;
return false;
}
if (given_prices[i] >= min_pred || given_prices[i] <= max_pred) {
// A value inside the FUDGE_FACTOR range is ignored so that it doesn't force the probability to 0.
const real_rate_range =
rate_range_from_given_and_base(given_prices[i], buy_price);
prob *= rate_pdf.range_limit(real_rate_range);
if (prob == 0) {
return 0;
}
// A value inside the FUDGE_FACTOR range is ignored so that the rate range does not become empty.
const real_rate_min = minimum_rate_from_given_and_base(given_prices[i], buy_price);
const real_rate_max = maximum_rate_from_given_and_base(given_prices[i], buy_price);
rate_min = Math.max(rate_min, real_rate_min);
rate_max = Math.min(rate_max, real_rate_max);
}
min_pred = given_prices[i];
max_pred = given_prices[i];
@@ -322,9 +145,10 @@ function generate_decreasing_random_price(
max: max_pred,
});
rate_pdf.decay(rate_decay_min, rate_decay_max);
rate_min -= rate_decay_max;
rate_max -= rate_decay_min;
}
return prob;
return true;
}
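// Illustrative sketch of the forward generation that generate_decreasing_random_price
// inverts, mirroring the pseudocode quoted above; randfloat and the concrete rates are
// hypothetical stand-ins (the 0.85-0.9 start and 0.03-0.05 decay match the decreasing
// phases used further down).
function example_forward_decreasing_prices(basePrice, start, length) {
  const randfloat = (a, b) => a + Math.random() * (b - a); // assumed uniform helper
  const sellPrices = [];
  let rate = randfloat(0.85, 0.9);
  for (let i = start; i < start + length; i++) {
    sellPrices[i] = intceil(rate * basePrice);
    rate -= randfloat(0.03, 0.05); // the rate drifts down every half-day
  }
  return sellPrices;
}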
@@ -335,9 +159,8 @@ function generate_decreasing_random_price(
* sellPrices[work++] = intceil(rate * basePrice);
* sellPrices[work++] = intceil(randfloat(rate_min, rate) * basePrice) - 1;
*
* Would return the conditional probability given the given_prices, and modify
* the predicted_prices array.
* If the given_prices won't match, returns 0.
* Would modify the predicted_prices array.
* If the given_prices won't match, returns false, otherwise returns true
*/
function generate_peak_price(
given_prices, predicted_prices, start, rate_min, rate_max) {
@@ -345,83 +168,15 @@ function generate_peak_price(
rate_max *= RATE_MULTIPLIER;
const buy_price = given_prices[0];
let prob = 1;
let rate_range = [rate_min, rate_max];
// * Calculate the probability first.
// Prob(middle_price)
const middle_price = given_prices[start + 1];
if (!isNaN(middle_price)) {
const min_pred = get_price(rate_min, buy_price);
const max_pred = get_price(rate_max, buy_price);
if (middle_price < min_pred - FUDGE_FACTOR || middle_price > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return 0;
}
if (middle_price >= min_pred || middle_price <= max_pred) {
// A value inside the FUDGE_FACTOR range is ignored so that it doesn't force the probability to 0.
const real_rate_range =
rate_range_from_given_and_base(middle_price, buy_price);
prob *= range_intersect_length(rate_range, real_rate_range) /
range_length(rate_range);
if (prob == 0) {
return 0;
}
rate_range = range_intersect(rate_range, real_rate_range);
}
}
const left_price = given_prices[start];
const right_price = given_prices[start + 2];
// Prob(left_price | middle_price), Prob(right_price | middle_price)
//
// A = rate_range[0], B = rate_range[1], C = rate_min, X = rate, Y = randfloat(rate_min, rate)
// rate = randfloat(A, B); sellPrices[work++] = intceil(randfloat(C, rate) * basePrice) - 1;
//
// => X->U(A,B), Y->U(C,X), Y-C->U(0,X-A), Y-C->U(0,1)*(X-A), Y-C->U(0,1)*U(C-A,B-A),
// let Z=Y-C, Z1=C-A, Z2=B-A, Z->U(0,1)*U(Z1,Z2)
// Prob(Z>=t) = integral_{x=0}^{1} [min(t/x,Z2)-min(t/x,Z1)]/ (Z2-Z1)
// let F(t, ZZ) = integral_{x=0}^{1} min(t/x, ZZ)
// 1. if ZZ < t, then min(t/x, ZZ) = ZZ -> F(t, ZZ) = ZZ
// 2. if ZZ >= t, then F(t, ZZ) = integral_{x=0}^{t/ZZ} ZZ + integral_{x=t/ZZ}^{1} t/x
// = t - t/ZZ log(t/ZZ)
// Prob(Z>=t) = (F(t, Z2) - F(t, Z1)) / (Z2 - Z1)
// Prob(Y>=t) = Prob(Z>=t-C)
for (const price of [left_price, right_price]) {
if (isNaN(price)) {
continue;
}
const min_pred = get_price(rate_min, buy_price) - 1;
const max_pred = get_price(rate_range[1], buy_price) - 1;
if (price < min_pred - FUDGE_FACTOR || price > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return 0;
}
if (price >= min_pred || price <= max_pred) {
// A value inside the FUDGE_FACTOR range is ignored so that it doesn't force the probability to 0.
const rate2_range = rate_range_from_given_and_base(price + 1, buy_price);
const F = (t, ZZ) => (ZZ < t ? ZZ : t - t / ZZ * Math.log(t / ZZ));
const [A, B] = rate_range;
const C = rate_min;
const Z1 = C - A;
const Z2 = B - A;
const PY = (t) => (F(t - C, Z2) - F(t - C, Z1)) / (Z2 - Z1);
prob *= PY(rate2_range[1]) - PY(rate2_range[0]);
if (prob == 0) {
return 0;
}
}
}
// * Then generate the real predicted range.
// We're doing things in a different order than how we calculate the probability,
// since forward prediction is more useful here.
//
// Main spike 1
min_pred = get_price(rate_min, buy_price) - 1;
max_pred = get_price(rate_max, buy_price) - 1;
if (!isNaN(given_prices[start])) {
if (given_prices[start] < min_pred - FUDGE_FACTOR || given_prices[start] > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return false;
}
min_pred = given_prices[start];
max_pred = given_prices[start];
}
@@ -432,8 +187,12 @@ function generate_peak_price(
// Main spike 2
min_pred = predicted_prices[start].min;
max_pred = get_price(rate_max, buy_price);
max_pred = intceil(2.0 * buy_price);
if (!isNaN(given_prices[start + 1])) {
if (given_prices[start + 1] < min_pred - FUDGE_FACTOR || given_prices[start + 1] > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return false;
}
min_pred = given_prices[start + 1];
max_pred = given_prices[start + 1];
}
@@ -443,9 +202,13 @@ function generate_peak_price(
});
// Main spike 3
min_pred = get_price(rate_min, buy_price) - 1;
min_pred = intceil(1.4 * buy_price) - 1;
max_pred = predicted_prices[start + 1].max - 1;
if (!isNaN(given_prices[start + 2])) {
if (given_prices[start + 2] < min_pred - FUDGE_FACTOR || given_prices[start + 2] > max_pred + FUDGE_FACTOR) {
// Given price is out of predicted range, so this is the wrong pattern
return false;
}
min_pred = given_prices[start + 2];
max_pred = given_prices[start + 2];
}
@@ -454,7 +217,7 @@ function generate_peak_price(
max: max_pred,
});
return prob;
return true;
}
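// Illustrative sketch of how the three spike days checked by generate_peak_price are
// generated forward, following the pseudocode quoted in its comment; randfloat and the
// example numbers are hypothetical stand-ins.
function example_forward_peak_prices(basePrice, rate_min, rate_max) {
  const randfloat = (a, b) => a + Math.random() * (b - a); // assumed uniform helper
  const rate = randfloat(rate_min, rate_max);
  return [
    intceil(randfloat(rate_min, rate) * basePrice) - 1, // day before the peak
    intceil(rate * basePrice),                          // the peak itself
    intceil(randfloat(rate_min, rate) * basePrice) - 1, // day after the peak
  ];
}
// e.g. example_forward_peak_prices(100, 1.4, 2.0) gives a peak roughly in [140, 200]
// flanked by two lower prices.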
function*
@@ -508,36 +271,31 @@ function*
max: buy_price,
},
];
let probability = 1;
// High Phase 1
probability *= generate_individual_random_price(
given_prices, predicted_prices, 2, high_phase_1_len, 0.9, 1.4);
if (probability == 0) {
if (!generate_individual_random_price(
given_prices, predicted_prices, 2, high_phase_1_len, 0.9, 1.4)) {
return;
}
// Dec Phase 1
probability *= generate_decreasing_random_price(
given_prices, predicted_prices, 2 + high_phase_1_len, dec_phase_1_len,
0.6, 0.8, 0.04, 0.1);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices, 2 + high_phase_1_len, dec_phase_1_len,
0.6, 0.8, 0.04, 0.1)) {
return;
}
// High Phase 2
probability *= generate_individual_random_price(given_prices, predicted_prices,
2 + high_phase_1_len + dec_phase_1_len, high_phase_2_len, 0.9, 1.4);
if (probability == 0) {
if (!generate_individual_random_price(given_prices, predicted_prices,
2 + high_phase_1_len + dec_phase_1_len, high_phase_2_len, 0.9, 1.4)) {
return;
}
// Dec Phase 2
probability *= generate_decreasing_random_price(
given_prices, predicted_prices,
2 + high_phase_1_len + dec_phase_1_len + high_phase_2_len,
dec_phase_2_len, 0.6, 0.8, 0.04, 0.1);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices,
2 + high_phase_1_len + dec_phase_1_len + high_phase_2_len,
dec_phase_2_len, 0.6, 0.8, 0.04, 0.1)) {
return;
}
@@ -548,17 +306,16 @@ function*
const prev_length = 2 + high_phase_1_len + dec_phase_1_len +
high_phase_2_len + dec_phase_2_len;
probability *= generate_individual_random_price(
given_prices, predicted_prices, prev_length, 14 - prev_length, 0.9, 1.4);
if (probability == 0) {
if (!generate_individual_random_price(
given_prices, predicted_prices, prev_length, 14 - prev_length, 0.9,
1.4)) {
return;
}
yield {
pattern_description: "Fluctuating",
pattern_number: 0,
prices: predicted_prices,
probability,
prices: predicted_prices
};
}
@@ -573,9 +330,7 @@ function* generate_pattern_0(given_prices) {
for (var dec_phase_1_len = 2; dec_phase_1_len < 4; dec_phase_1_len++) {
for (var high_phase_1_len = 0; high_phase_1_len < 7; high_phase_1_len++) {
for (var high_phase_3_len = 0; high_phase_3_len < (7 - high_phase_1_len - 1 + 1); high_phase_3_len++) {
yield* multiply_generator_probability(
generate_pattern_0_with_lengths(given_prices, high_phase_1_len, dec_phase_1_len, 7 - high_phase_1_len - high_phase_3_len, 5 - dec_phase_1_len, high_phase_3_len),
1 / (4 - 2) / 7 / (7 - high_phase_1_len));
yield* generate_pattern_0_with_lengths(given_prices, high_phase_1_len, dec_phase_1_len, 7 - high_phase_1_len - high_phase_3_len, 5 - dec_phase_1_len, high_phase_3_len);
}
}
}
@@ -614,11 +369,10 @@ function* generate_pattern_1_with_peak(given_prices, peak_start) {
max: buy_price,
},
];
let probability = 1;
probability *= generate_decreasing_random_price(
given_prices, predicted_prices, 2, peak_start - 2, 0.85, 0.9, 0.03, 0.05);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices, 2, peak_start - 2, 0.85, 0.9, 0.03,
0.05)) {
return;
}
@@ -626,24 +380,22 @@ function* generate_pattern_1_with_peak(given_prices, peak_start) {
min_randoms = [0.9, 1.4, 2.0, 1.4, 0.9, 0.4, 0.4, 0.4, 0.4, 0.4, 0.4]
max_randoms = [1.4, 2.0, 6.0, 2.0, 1.4, 0.9, 0.9, 0.9, 0.9, 0.9, 0.9]
for (let i = peak_start; i < 14; i++) {
probability *= generate_individual_random_price(
given_prices, predicted_prices, i, 1, min_randoms[i - peak_start],
max_randoms[i - peak_start]);
if (probability == 0) {
if (!generate_individual_random_price(
given_prices, predicted_prices, i, 1, min_randoms[i - peak_start],
max_randoms[i - peak_start])) {
return;
}
}
yield {
pattern_description: "Large spike",
pattern_number: 1,
prices: predicted_prices,
probability,
prices: predicted_prices
};
}
function* generate_pattern_1(given_prices) {
for (var peak_start = 3; peak_start < 10; peak_start++) {
yield* multiply_generator_probability(generate_pattern_1_with_peak(given_prices, peak_start), 1 / (10 - 3));
yield* generate_pattern_1_with_peak(given_prices, peak_start);
}
}
@@ -672,19 +424,16 @@ function* generate_pattern_2(given_prices) {
max: buy_price,
},
];
let probability = 1;
probability *= generate_decreasing_random_price(
given_prices, predicted_prices, 2, 14 - 2, 0.85, 0.9, 0.03, 0.05);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices, 2, 14 - 2, 0.85, 0.9, 0.03, 0.05)) {
return;
}
yield {
pattern_description: "Decreasing",
pattern_number: 2,
prices: predicted_prices,
probability,
prices: predicted_prices
};
}
@@ -733,30 +482,27 @@ function* generate_pattern_3_with_peak(given_prices, peak_start) {
];
let probability = 1;
probability *= generate_decreasing_random_price(
given_prices, predicted_prices, 2, peak_start - 2, 0.4, 0.9, 0.03, 0.05);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices, 2, peak_start - 2, 0.4, 0.9, 0.03,
0.05)) {
return;
}
// The peak
probability *= generate_individual_random_price(
given_prices, predicted_prices, peak_start, 2, 0.9, 1.4);
if (probability == 0) {
if (!generate_individual_random_price(
given_prices, predicted_prices, peak_start, 2, 0.9, 1.4)) {
return;
}
probability *= generate_peak_price(
given_prices, predicted_prices, peak_start + 2, 1.4, 2.0);
if (probability == 0) {
if (!generate_peak_price(
given_prices, predicted_prices, peak_start + 2, 1.4, 2.0)) {
return;
}
if (peak_start + 5 < 14) {
probability *= generate_decreasing_random_price(
given_prices, predicted_prices, peak_start + 5, 14 - (peak_start + 5),
0.4, 0.9, 0.03, 0.05);
if (probability == 0) {
if (!generate_decreasing_random_price(
given_prices, predicted_prices, peak_start + 5,
14 - (peak_start + 5), 0.4, 0.9, 0.03, 0.05)) {
return;
}
}
@@ -764,60 +510,62 @@ function* generate_pattern_3_with_peak(given_prices, peak_start) {
yield {
pattern_description: "Small spike",
pattern_number: 3,
prices: predicted_prices,
probability,
prices: predicted_prices
};
}
function* generate_pattern_3(given_prices) {
for (let peak_start = 2; peak_start < 10; peak_start++) {
yield* multiply_generator_probability(generate_pattern_3_with_peak(given_prices, peak_start), 1 / (10 - 2));
for (var peak_start = 2; peak_start < 10; peak_start++) {
yield* generate_pattern_3_with_peak(given_prices, peak_start);
}
}
function get_transition_probability(previous_pattern) {
if (typeof previous_pattern === 'undefined' || Number.isNaN(previous_pattern) || previous_pattern === null || previous_pattern < 0 || previous_pattern > 3) {
// TODO: Fill the steady state pattern (https://github.com/mikebryant/ac-nh-turnip-prices/pull/90) here.
return [0.346278, 0.247363, 0.147607, 0.258752];
}
return PROBABILITY_MATRIX[previous_pattern];
}
function* generate_all_patterns(sell_prices, previous_pattern) {
const generate_pattern_fns = [generate_pattern_0, generate_pattern_1, generate_pattern_2, generate_pattern_3];
const transition_probability = get_transition_probability(previous_pattern);
for (let i = 0; i < 4; i++) {
yield* multiply_generator_probability(generate_pattern_fns[i](sell_prices), transition_probability[i]);
}
}
function* generate_possibilities(sell_prices, first_buy, previous_pattern) {
function* generate_possibilities(sell_prices, first_buy) {
if (first_buy || isNaN(sell_prices[0])) {
for (var buy_price = 90; buy_price <= 110; buy_price++) {
sell_prices[0] = sell_prices[1] = buy_price;
if (first_buy) {
yield* generate_pattern_3(sell_prices);
} else {
// All buy prices are equally probable and we're at the outermost layer,
// so we don't need multiply_generator_probability here.
yield* generate_all_patterns(sell_prices, previous_pattern)
yield* generate_pattern_0(sell_prices);
yield* generate_pattern_1(sell_prices);
yield* generate_pattern_2(sell_prices);
yield* generate_pattern_3(sell_prices);
}
}
} else {
yield* generate_all_patterns(sell_prices, previous_pattern)
yield* generate_pattern_0(sell_prices);
yield* generate_pattern_1(sell_prices);
yield* generate_pattern_2(sell_prices);
yield* generate_pattern_3(sell_prices);
}
}
function analyze_possibilities(sell_prices, first_buy, previous_pattern) {
const generated_possibilities = Array.from(generate_possibilities(sell_prices, first_buy, previous_pattern));
function row_probability(possibility, previous_pattern) {
return PROBABILITY_MATRIX[previous_pattern][possibility.pattern_number] / PATTERN_COUNTS[possibility.pattern_number];
}
const total_probability = generated_possibilities.reduce((acc, it) => acc + it.probability, 0);
for (const it of generated_possibilities) {
it.probability /= total_probability;
function get_probabilities(possibilities, previous_pattern) {
if (typeof previous_pattern === 'undefined' || Number.isNaN(previous_pattern) || previous_pattern === null || previous_pattern < 0 || previous_pattern > 3) {
return possibilities
}
var max_percent = possibilities.map(function (poss) {
return row_probability(poss, previous_pattern);
}).reduce(function (prev, current) {
return prev + current;
}, 0);
return possibilities.map(function (poss) {
poss.probability = row_probability(poss, previous_pattern) / max_percent;
return poss;
});
}
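// Illustrative sketch of the weighting above, with made-up matrix entries: if
// previous_pattern were 2 and only one pattern-1 and one pattern-3 possibility survived,
// with PROBABILITY_MATRIX[2][1] = 0.45 and PROBABILITY_MATRIX[2][3] = 0.15 (hypothetical
// values), row_probability gives 0.45 / 7 and 0.15 / 8 (PATTERN_COUNTS spreads each
// pattern's transition probability over the variants enumerated for it), and dividing by
// their sum leaves final probabilities of about 0.77 and 0.23.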
function analyze_possibilities(sell_prices, first_buy, previous_pattern) {
generated_possibilities = Array.from(generate_possibilities(sell_prices, first_buy));
generated_possibilities = get_probabilities(generated_possibilities, previous_pattern);
for (let poss of generated_possibilities) {
var weekMins = [];
var weekMaxes = [];
