trading_system_1.epl: improve inline documentation

2018-07-23 21:45:35 -07:00
parent 18b52ed9eb
commit e3bb27029f

@ -3,6 +3,10 @@
-- Statements must be separated by an empty line.
--
-- Setup variables
--
-- The time the trading logic will begin to enter trades.
-- Exiting trades is 24/7.
create constant variable int StartTimeHour = 9
@ -18,10 +22,10 @@ create constant variable string OHLCInterval = '10s'
-- Amount to be traded, measured in units.
create constant variable int TradeSize = 100000
-- How many events to use for simple moving average calculation
create constant variable int SMASize = 5
-- How many events to store for Ref() access
create constant variable int RefSize = 5
@ -29,8 +33,12 @@ create constant variable int RefSize = 5
-- A named window that contains the current tick
--
-- Define the window as length 1, using the structure of the TickEvent
-- java class to describe what the window contains.
create window CurrentTickWindow#length(1) as TickEvent
-- Describe how events get added to the window. This runs every time
-- a new TickEvent is posted.
insert into CurrentTickWindow select * from TickEvent
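
The TickEvent java class itself is not part of this diff. Going only by the members the EPL references (the time field, the mid price read as C.mid further down, and the midDouble() value mentioned above), a minimal sketch of its shape might look like the following; anything beyond those members is an assumption.

    import java.math.BigDecimal;
    import org.joda.time.DateTime;

    // Hypothetical sketch of the TickEvent POJO the statements above refer to.
    // Only the members actually referenced in the EPL are shown; the real
    // class in the repo may carry more fields (bid, ask, symbol, ...).
    public class TickEvent {
        private final DateTime time;   // tick timestamp, fed to the #OHLC view
        private final BigDecimal mid;  // midpoint price, read as C.mid in LongEntryStream

        public TickEvent(DateTime time, BigDecimal mid) {
            this.time = time;
            this.mid = mid;
        }

        public DateTime getTime() { return time; }   // exposed to EPL as "time"
        public BigDecimal getMid() { return mid; }   // exposed to EPL as "mid"

        // The value the OHLC plugin aggregates, per the comment above
        public double midDouble() { return mid.doubleValue(); }
    }
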
@ -53,8 +61,13 @@ on TickEvent as t set InTradingHours =
-- A stream of OHLC values calculated from TickEvents
--
-- Create the stream to contain OHLCEvents
create variant schema OHLCStream as OHLCEvent
-- Send every TickEvent to the OHLC plugin. The plugin will post an
-- OHLCEvent to OHLCStream every OHLCInterval amount of time. It uses
-- TickEvent.time ("time") as the source of the timestamp, and uses
-- TickEvent.midDouble() as the value to use in the OHLC calculation.
insert into OHLCStream
select * from TickEvent#OHLC(OHLCInterval, time, midDouble)
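
OHLCEvent is likewise defined outside this file. Judging by the properties the statements below read from OHLCStream (time, open, high, low, close), its shape is presumably along these lines; this is a sketch, not the repo's actual class.

    import org.joda.time.DateTime;

    // Hypothetical sketch of the OHLCEvent the #OHLC plugin view posts once per
    // OHLCInterval. Field names are inferred from the EPL that consumes them.
    public class OHLCEvent {
        private final DateTime time;   // timestamp of the bar (assumption: bucket start)
        private final double open;
        private final double high;
        private final double low;
        private final double close;

        public OHLCEvent(DateTime time, double open, double high, double low, double close) {
            this.time = time;
            this.open = open;
            this.high = high;
            this.low = low;
            this.close = close;
        }

        public DateTime getTime() { return time; }
        public double getOpen() { return open; }
        public double getHigh() { return high; }
        public double getLow() { return low; }
        public double getClose() { return close; }
    }
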
@ -63,11 +76,16 @@ insert into OHLCStream
-- Simple moving average streams
--
-- SMACloseStream contains OHLCValueEvents. These are like
-- OHLCEvents, but add an extra field for an arbitrary value. In this
-- stream, that extra value will contain the average of OHLC close
-- values.
create schema SMACloseStream as ats.plugin.OHLCValueEvent
-- Average the most recent OHLC close values from OHLCStream and
-- post an event that contains open, high, low, close, and
-- SMA(close). The number of OHLC events used in the SMA calc is set
-- by the SMASize variable.
insert into SMACloseStream
select new ats.plugin.OHLCValueEvent(time, open, high, low, close, Avg(close))
from OHLCStream#length(SMASize)
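
Outside of Esper, Avg(close) over a length window is just a rolling mean of the most recent SMASize close values. The plain-Java sketch below only models that semantics; it is not how Esper evaluates the statement.

    import java.util.ArrayDeque;
    import java.util.Deque;

    // Models what Avg(close) over OHLCStream#length(SMASize) computes:
    // the mean of the close values of the events currently in the window.
    public class RollingCloseAverage {
        private final int size;                        // corresponds to SMASize
        private final Deque<Double> closes = new ArrayDeque<>();

        public RollingCloseAverage(int size) {
            this.size = size;
        }

        // Call once per OHLC bar; returns the current SMA(close).
        public double add(double close) {
            closes.addLast(close);
            if (closes.size() > size) {
                closes.removeFirst();                  // the length window keeps only the last `size` events
            }
            double sum = 0.0;
            for (double c : closes) {
                sum += c;
            }
            return sum / closes.size();
        }
    }
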
@ -77,42 +95,60 @@ insert into SMACloseStream
-- ValueWhen calculations
--
-- A stream that feeds B1 and B2. Each event contains a double
-- precision floating point value "low", and a timestamp called
-- "time".
create schema BStream as (low double, time org.joda.time.DateTime)
-- Listen to the last "RefSize" number of SMACloseStream events. Look
-- for an OHLC bar with a lower average close than its neighbors. Add
-- that bar's low value and its timestamp to the stream. As described
-- in SMACloseStream, "value" in the query below represents
-- SMA(close).
insert into BStream
select prev(1, low) as low, prev(1, time) as time from SMACloseStream#length(RefSize)
where prev(0, value) > prev(1, value)
and prev(1, value) < prev(2, value)
-- Define B1 to contain the same fields as BStream
create schema B1 (low double, time org.joda.time.DateTime)
-- B1 contains the most recent low value and time from BStream.
-- This is the last time an average close was lower
-- than the ones before and after.
-- Since the time is included in the event, no separate BT1 is needed.
insert into B1 select prev(0, low) as low, prev(0, time) as time
from BStream#length(RefSize)
-- B2 contains the *second* most recent occurrence in BStream, but is
-- otherwise the same as B1.
create schema B2 (low double, time org.joda.time.DateTime)
insert into B2 select prev(1, low) as low, prev(1, time) as time
from BStream#length(RefSize)
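
Restated in plain Java, the BStream/B1/B2 logic amounts to: whenever the middle one of the last three SMA(close) values is lower than both of its neighbors, record that bar's low and time; B1 is the most recent such record and B2 the one before it. The sketch below only models that semantics (PStream/P1/P2 below are the mirror image with the comparisons flipped); the real work is done by the Esper statements.

    import java.util.ArrayList;
    import java.util.List;
    import org.joda.time.DateTime;

    // Plain-Java model of BStream / B1 / B2. Bar stands in for an SMACloseStream
    // event: value is SMA(close), low and time come from the OHLC bar.
    public class LocalLowTracker {

        public static final class Bar {
            final double value;
            final double low;
            final DateTime time;

            Bar(double value, double low, DateTime time) {
                this.value = value;
                this.low = low;
                this.time = time;
            }
        }

        private final int refSize;                              // corresponds to RefSize
        private Bar prev2, prev1, prev0;                        // prev(2), prev(1), prev(0)
        private final List<Bar> localLows = new ArrayList<>();  // plays the role of BStream

        public LocalLowTracker(int refSize) {
            this.refSize = refSize;
        }

        // Call once per SMACloseStream event.
        public void onBar(Bar bar) {
            prev2 = prev1;
            prev1 = prev0;
            prev0 = bar;
            if (prev2 != null
                    && prev0.value > prev1.value        // where prev(0, value) > prev(1, value)
                    && prev1.value < prev2.value) {     //   and prev(1, value) < prev(2, value)
                localLows.add(prev1);                   // insert into BStream
                if (localLows.size() > refSize) {
                    localLows.remove(0);                // BStream#length(RefSize)
                }
            }
        }

        // B1: most recent local low; B2: the one before it. Null until seen.
        public Bar b1() { return localLows.isEmpty() ? null : localLows.get(localLows.size() - 1); }
        public Bar b2() { return localLows.size() < 2 ? null : localLows.get(localLows.size() - 2); }
    }
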
-- A stream that feeds P1 and P2.
create schema PStream as (low double, time org.joda.time.DateTime)
-- Find an OHLC bar with a higher average close than its neighbors.
-- Add that low value and its timestamp to the stream.
insert into PStream
select prev(1, low) as low, prev(1, time) as time from SMACloseStream#length(RefSize)
where prev(0, value) < prev(1, value)
and prev(1, value) > prev(2, value)
-- P1 contains the most recent low value and time from PStream.
-- This is the last time an average close was higher
-- than the ones before and after.
-- Since the time is included in the event, no separate PT1 is needed.
create schema P1 (low double, time org.joda.time.DateTime)
insert into P1 select prev(0, low) as low, prev(0, time) as time
from PStream#length(RefSize)
-- P2 contains the second most recent occurrence in PStream.
create schema P2 (low double, time org.joda.time.DateTime)
insert into P2 select prev(1, low) as low, prev(1, time) as time
@ -123,13 +159,21 @@ insert into P2 select prev(1, low) as low, prev(1, time) as time
-- Long entry
--
-- A helper for LE calc. Keep track of the highest OHLC high value.
create window MaxHigh3Window#length(1) as (high double)
-- Post the largest high value on OHLCStream from the most recent
-- three bars.
insert into MaxHigh3Window
select max(high) as high from OHLCStream#length(3)
-- Long entry events contain the current tick's midpoint value and
-- timestamp.
create schema LongEntryStream as (current BigDecimal, time org.joda.time.DateTime)
-- The long entry calc below is translated from this entry in the
-- spreadsheet:
--
-- LE = C > HHV(High,3)
-- and B1 < B2
@ -138,8 +182,6 @@ insert into MaxHigh3Window
-- and BT2 > PT2
-- and PT1 > BT2
insert into LongEntryStream
select C.mid as current, C.time as time
from CurrentTickWindow as C,
@ -153,28 +195,40 @@ insert into LongEntryStream
and EPLHelpers.laterThan(B2.time, P2.time)
and EPLHelpers.laterThan(P1.time, B2.time)
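
EPLHelpers is another java class that is not part of this diff. Given how laterThan(a, b) is used in the join condition above, it presumably just compares two Joda-Time timestamps; a plausible sketch, with the caveat that the real implementation may differ:

    import org.joda.time.DateTime;

    // Hypothetical sketch of EPLHelpers.laterThan(a, b) as used in the
    // LongEntryStream join condition above.
    public final class EPLHelpersSketch {
        private EPLHelpersSketch() {}

        // true when `a` is strictly after `b`; null-safe so a not-yet-populated
        // B1/B2/P1/P2 timestamp simply fails the condition instead of throwing
        public static boolean laterThan(DateTime a, DateTime b) {
            return a != null && b != null && a.isAfter(b);
        }
    }
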
-- Because multiple streams feed LongEntryStream (CurrentTickWindow,
-- MaxHigh3Window, B1, B2...), an event on any of those streams causes
-- the LongEntryStream logic above to be triggered. This often causes
-- multiple LongEntryStream events to be generated for a single tick
-- when several of the feeder streams are updated at the same time.
--
-- LongEntryDistinct filters out duplicate LongEntryStream events,
-- leaving a maximum of one event per tick.
create schema LongEntryDistinct as (current BigDecimal, time org.joda.time.DateTime)
insert into LongEntryDistinct
select le.current as current, le.time as time
from pattern [every-distinct(le.time) le=LongEntryStream]
-- The EsperProcessor java class has a listener for LongEntryDistinct
-- events. In the future it will place trades but at the moment it
-- just logs the event.
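
EsperProcessor itself is also outside this diff. With the Esper 7 client API, attaching the kind of listener described above might look roughly like this; the statement text and logging are illustrative, not the repo's actual code.

    import com.espertech.esper.client.EPServiceProvider;
    import com.espertech.esper.client.EPStatement;
    import com.espertech.esper.client.EventBean;
    import com.espertech.esper.client.UpdateListener;

    // Rough sketch of wiring a LongEntryDistinct listener with the Esper 7 API.
    public class LongEntryListenerSketch {

        public static void attach(EPServiceProvider epService) {
            EPStatement stmt = epService.getEPAdministrator()
                    .createEPL("select current, time from LongEntryDistinct");
            UpdateListener listener = (newEvents, oldEvents) -> {
                if (newEvents == null) {
                    return;
                }
                for (EventBean event : newEvents) {
                    // For now just log; placing the actual trade is future work.
                    System.out.println("Long entry signal: current=" + event.get("current")
                            + " time=" + event.get("time"));
                }
            };
            stmt.addListener(listener);
        }
    }
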
--
-- Event logging
--
-- Log events consist of the stream name and an event description.
create schema LogStream as (stream string, event string)
-- Enable logging the events on specific streams by uncommenting
-- individual lines below. Depending on ongoing debugging needs, some
-- of these can be either helpful or too noisy. Comment/uncomment as
-- you see fit.
insert into LogStream select 'TickEvent' as stream, EPLHelpers.str(*) as event from TickEvent
insert into LogStream select 'OHLCStream' as stream, EPLHelpers.str(*) as event from OHLCStream
-- insert into LogStream select 'BStream' as stream, EPLHelpers.str(*) as event from BStream
@ -193,3 +247,6 @@ insert into LogStream select 'P2' as stream, EPLHelpers.str(*) as event from P2
-- insert into LogStream select 'LongEntryStream' as stream, EPLHelpers.str(*) as event from LongEntryStream
-- insert into LogStream select 'LongEntryDistinct' as stream, EPLHelpers.str(*) as event from LongEntryDistinct
-- TODO (for Seth): look into LogSink http://esper.espertech.com/release-7.1.0/esper-reference/html/dataflow.html#dataflow-reference-logsink
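
EPLHelpers.str(*) appears in every log statement above; passing * hands the underlying event to the helper. The helper itself is not shown in this diff, but a minimal sketch of the idea (the real version may format events more carefully):

    import java.util.Arrays;

    // Hypothetical sketch of EPLHelpers.str(*): turn the underlying event
    // object into a short string for LogStream.
    public final class EPLHelpersStrSketch {
        private EPLHelpersStrSketch() {}

        public static String str(Object event) {
            if (event == null) {
                return "null";
            }
            if (event instanceof Object[]) {        // events selected as object arrays
                return Arrays.deepToString((Object[]) event);
            }
            return event.toString();                // POJO and map-backed events
        }
    }
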