• API
• FAQ
• Tools
• Archive
SHARE
TWEET

# Untitled

Pasted by a guest on Dec 15th, 2019 · 119 views · Never expires
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
1. '''
2. Simple algorithm to trade a single stock based upon certain rules.
3. The data is defined in the pipeline definition.
4. The selection logic is performed in the code.
5. '''
6.
7. # The following imports need to included when using Pipeline
8. from quantopian.algorithm import attach_pipeline, pipeline_output
9. from quantopian.pipeline import Pipeline, CustomFactor
10.
11. # Import all the built in Quantopian filters and factors (just in case)
12. import quantopian.pipeline.filters as Filters
13. import quantopian.pipeline.factors as Factors
14.
15. # Import Pandas and Numpy (just in case we want to use their functionality)
16. import pandas as pd
17. import numpy as np
18.
# Import any specialized packages here (eg scipy.optimize or scipy.stats)
20. pass
21.
22. # Import any needed datasets
23. from quantopian.pipeline.data.builtin import USEquityPricing
24.
25.
# Set any 'constants' you will be using
# NOTE(review): `symbols` is injected into the namespace by the Quantopian
# IDE at runtime (not imported); it presumably returns a list of Equity
# objects for the given ticker strings — confirm against the platform docs.
MY_STOCKS = symbols('AAPL')#, 'WFC', 'MSFT', 'AMZN', 'FB', 'XOM', 'C', 'UNH', 'DIS', 'PM',
                   #'T', 'KO', 'VZ', 'GE', 'WMT', 'BAC', 'PG', 'CVX', 'V', 'PFE')

# Let's equally weight our 'potential' positions.
# Note that this may not make the best use of cash because we will
# not be investing a position's 'share' when it doesn't pass the rules.
WEIGHT = 1.0 / len(MY_STOCKS)
34.
def initialize(context):
    """
    Called once at the start of the algorithm.

    Attaches the data pipeline and schedules the daily trading and
    record-keeping functions.

    Parameters
    ----------
    context : AlgorithmContext
        Quantopian-supplied object for persisting state between calls.
    """

    # Set commission model or omit and the default Q models will be used,
    # eg: set_commission(commission.PerShare(cost=0.0, min_trade_cost=0.0))

    # Attach the pipeline defined in pipe_definition so we have data to use
    attach_pipeline(pipe_definition(context), name='my_data')

    # Schedule when to trade.
    # NOTE(review): the paste dropped this call; restored to schedule the
    # `rebalance` function at the open — confirm against the original source.
    schedule_function(rebalance, date_rules.every_day(), time_rules.market_open())

    # Schedule when to record any tracking data
    schedule_function(record_vars, date_rules.every_day(), time_rules.market_close())
52.
53.
def pipe_definition(context):
    """
    Build and return the Pipeline definition.

    The pipeline only defines which columns appear in the resulting
    dataframe — think of it as defining a big spreadsheet (really a
    dataframe) of data. No trading logic happens here; that is done later
    in the algo with the data the pipeline returns.

    Parameters
    ----------
    context : AlgorithmContext
        Unused here, but passed for consistency with the other callbacks.

    Returns
    -------
    Pipeline
        Columns 'close_price' and 'sma_15', screened to MY_STOCKS.
    """

    # Universe filter defining our baseline set of securities.
    # If no filter is used then ALL assets in the Q database will
    # potentially be returned, which is usually not what one wants because
    #    1) it includes a mix of ETFs and stocks
    #    2) it includes very low liquidity and 'penny' stocks
    # The filter can also be used as a mask in factors to speed up calcs.
    # We just want a fixed list of stocks, so use the StaticAssets filter.
    universe = Filters.StaticAssets(MY_STOCKS)

    # Basic data factor: the most recent close, via the 'latest' method
    # on a datacolumn object (dataset must be imported first).
    close_price = USEquityPricing.close.latest

    # Built-in factor: 15-day simple moving average of the close,
    # masked to the universe so it is only computed where needed.
    sma_15 = Factors.SimpleMovingAverage(inputs=[USEquityPricing.close],
                                         window_length=15,
                                         mask=universe)

    # Custom factors and additional filters would be created here if needed
    # (the template's placeholder `pass` statements were dead code and were
    # removed).

    # Define the columns and the screen the pipeline should return.
    # This becomes the data the algorithm uses to make trading decisions.
    return Pipeline(
            columns={
                'close_price': close_price,
                'sma_15': sma_15,
            },
            screen=universe,
            )
102.
103.
def before_trading_start(context, data):
    """
    Daily pre-market callback.

    NOTE(review): the paste dropped this `def` line; the name restored here
    is the standard Quantopian daily hook — confirm against the original.

    Runs pipeline_output to get the latest data for each security. The data
    is returned in a 2D pandas dataframe: rows are the security objects,
    columns are what was defined in the pipeline definition.
    """

    # Get a dataframe of our pipe data. Placed on the context object so it's
    # available to other functions and methods (quasi global).
    context.output = pipeline_output('my_data')
114.
115.
def rebalance(context, data):
    """
    Scheduled function to execute all buys and sells.

    NOTE(review): the paste dropped this `def` line; the signature restored
    here matches the body's use of `context` and `data` — confirm the name
    against the original source and the schedule_function call.

    No logic was done in the pipeline — it just fetched the data. Here is
    where you filter, sort, and act on that data. Anything that could have
    been done in pipeline can be done with the dataframe it returns; use
    the pandas methods on context.output.
    """

    # Open a position in any stock whose close is above its 15-day SMA
    # and which we don't already hold.
    open_rules = 'close_price > sma_15'
    open_these = context.output.query(open_rules).index.tolist()

    for stock in open_these:
        if stock not in context.portfolio.positions and data.can_trade(stock):
            order_target_percent(stock, WEIGHT)

    # Close any held position whose close has fallen below its 15-day SMA.
    close_rules = 'close_price < sma_15'
    close_these = context.output.query(close_rules).index.tolist()

    for stock in close_these:
        if stock in context.portfolio.positions and data.can_trade(stock):
            order_target_percent(stock, 0)
142.
143.
def record_vars(context, data):
    """
    Plot tracking variables at the end of each day.

    Charts the account leverage and the count of currently open positions.
    """

    # Snapshot the values first, then record them in a single call.
    current_leverage = context.account.leverage
    position_count = len(context.portfolio.positions)

    record(leverage=current_leverage, positions=position_count)
RAW Paste Data
We use cookies for various purposes including analytics. By continuing to use Pastebin, you agree to our use of cookies as described in the Cookies Policy.

Top